FaceNN : Face detection, feature, gender, age, emotion recognition
The facenn module provides face detection and recognition functions.
Use the following code to import the facenn module:
var facenn = require('facenn');
Support
The following shows the facenn module APIs available for each permission level.
API | User Mode | Privilege Mode |
---|---|---|
facenn.detect | ● | ● |
facenn.feature | ● | ● |
facenn.compare | ● | ● |
facenn Object
facenn.detect(videoBuf, attribute[, quick])
- videoBuf {Buffer} Video buffer.
- attribute {Object} Video attribute.
- quick {Boolean} Whether to use quick mode. Optional.
- Returns: {Array} Array of detected face info objects.
Detect faces in the given video buffer.
The video attribute object contains the following members:
- width {Integer} Video width.
- height {Integer} Video height.
- pixelFormat {Integer} Pixel format.
pixelFormat is an integer and can be one of the following values:
Value | Description |
---|---|
facenn.PIX_FMT_RGB24 | RGB24 pixel format. |
facenn.PIX_FMT_BGR2RGB24 | BGR24 to RGB24 pixel format. |
facenn.PIX_FMT_GRAY2RGB24 | Grayscale to RGB24 pixel format. |
facenn.PIX_FMT_RGBA2RGB24 | RGBA to RGB24 pixel format. |
Each object in the returned array contains the following members:
- score {Number} The probability that this area is a face.
- x0 {Integer} X position of the upper left corner.
- y0 {Integer} Y position of the upper left corner.
- x1 {Integer} X position of the lower right corner.
- y1 {Integer} Y position of the lower right corner.
- area {Number} Area, non-quick mode only.
- regreCoord {Array} Regression coordinates, non-quick mode only.
- landmark {Array} Landmark points, non-quick mode only.
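The following is a minimal sketch of calling facenn.detect on a single frame. The blank rgbBuf buffer and its 640 x 360 size are placeholder assumptions; in practice the frame comes from a camera or decoder as in the examples below.
var facenn = require('facenn');

// Placeholder frame: a blank 640 x 360 RGB24 buffer standing in for real video data.
var rgbBuf = new Buffer(640 * 360 * 3);
rgbBuf.fill(0);

var attr = { width: 640, height: 360, pixelFormat: facenn.PIX_FMT_RGB24 };

// Quick mode: each result carries only the score and bounding box.
var faces = facenn.detect(rgbBuf, attr, true);
for (var i = 0; i < faces.length; i++) {
    console.log('face', i, 'score:', faces[i].score,
                'box:', faces[i].x0, faces[i].y0, faces[i].x1, faces[i].y1);
}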
facenn.feature(videoBuf, attribute, faceInfo[, extra])
- videoBuf {Buffer} Video buffer.
- attribute {Object} Video attribute.
- faceInfo {Object} Face info.
- extra {Object} Specifies which extra face attributes are needed. Optional, default: undefined.
- Returns: {Object} Face feature.
Get the feature of the given face.
The returned face feature object contains the following members:
- keys {Array} Face keys.
- male {Boolean} Gender, present when extra.male is true.
- age {Integer} Age, present when extra.age is true.
- emotion {String} Emotion, present when extra.emotion is true.
- live {Number} Living probability, present when extra.live is true.
emotion is a string and can be one of: 'angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'.
live represents the probability that this face belongs to a live person, in the range 0.0 ~ 1.0.
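As a sketch of how detect and feature work together, the snippet below requests gender, age and emotion for every detected face. The blank frame buffer is again only a placeholder for real video data.
var facenn = require('facenn');

// Placeholder frame: replace with a real RGB24 frame in practice.
var rgbBuf = new Buffer(640 * 360 * 3);
rgbBuf.fill(0);

var attr = { width: 640, height: 360, pixelFormat: facenn.PIX_FMT_RGB24 };
var extra = { male: true, age: true, emotion: true };

var faces = facenn.detect(rgbBuf, attr);
for (var i = 0; i < faces.length; i++) {
    var feature = facenn.feature(rgbBuf, attr, faces[i], extra);
    if (feature) {
        console.log(feature.male ? 'Male' : 'Female', feature.age, feature.emotion);
    }
}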
facenn.compare(faceKeys1, faceKeys2)
- faceKeys1 {Object} Face keys 1.
- faceKeys2 {Object} Face keys 2.
- Returns: {Number} The similarity between the two faces.
Compare the similarity between two faces. The return value is in the range 0.0 ~ 1.0.
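A minimal sketch of comparing two sets of face keys against a similarity threshold. Both key sets are assumed to have been produced by facenn.feature as shown above; the 0.6 threshold is only an example value (the last example below uses the same figure).
var facenn = require('facenn');

// Assumption: keysA and keysB come from facenn.feature(...).keys,
// e.g. loaded from files written by the storage example below.
function isSamePerson(keysA, keysB) {
    var threshold = 0.6; // Example threshold, tune for your application.
    var similarity = facenn.compare(keysA, keysB);
    return similarity >= threshold;
}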
Example
This example shows how to detect faces.
var MediaDecoder = require('mediadecoder');
var iosched = require('iosched');
var facenn = require('facenn');
var netcam = new MediaDecoder().open('rtsp://admin:admin@10.4.0.12');
netcam.destVideoFormat({width: 640, height: 360, fps: 1, pixelFormat: MediaDecoder.PIX_FMT_RGB24, noDrop: false, disable: false});
netcam.destAudioFormat({disable: true});
netcam.previewFormat({enable: true, fb: 0, fps: 25});
var quited = false;
var ol = netcam.overlay();
netcam.on('video', (video) => {
    ol.clear();
    var buf = new Buffer(video.arrayBuffer); // One decoded RGB24 frame.
    var faceInfo = facenn.detect(buf, {width: 640, height: 360, pixelFormat: facenn.PIX_FMT_RGB24});
    if (faceInfo.length) {
        for (var i = 0; i < faceInfo.length; i++) {
            // Draw a rectangle around each detected face on the preview overlay.
            ol.rect(faceInfo[i].x0, faceInfo[i].y0, faceInfo[i].x1, faceInfo[i].y1, MediaDecoder.C_YELLOW, 1, 0, false);
        }
    }
});
netcam.on('eof', () => {
    quited = true;
});
netcam.start();
while (!quited) {
    iosched.poll(); // Event poll.
}
netcam.close();
This example shows how to detect faces and get face features.
var MediaDecoder = require('mediadecoder');
var iosched = require('iosched');
var facenn = require('facenn');
var netcam = new MediaDecoder().open('rtsp://admin:admin@10.4.0.12');
netcam.destVideoFormat({width: 640, height: 360, fps: 1, pixelFormat: MediaDecoder.PIX_FMT_RGB24, noDrop: false, disable: false});
netcam.destAudioFormat({disable: true});
netcam.previewFormat({enable: true, fb: 0, fps: 25});
var quited = false;
var ol = netcam.overlay();
var extra = { male: true, age: true, emotion: true };
netcam.on('video', (video) => {
    var buf = new Buffer(video.arrayBuffer);
    var faceInfo = facenn.detect(buf, {width: 640, height: 360, pixelFormat: facenn.PIX_FMT_RGB24});
    var color = MediaDecoder.C_RED;
    var hasClear = false;
    var ageGenderEmotion = '';
    if (faceInfo.length) {
        ol.font(ol.F8X12);
        for (var i = 0; i < faceInfo.length; i++) {
            var feature = facenn.feature(buf, {width: 640, height: 360, pixelFormat: facenn.PIX_FMT_RGB24}, faceInfo[i], extra);
            if (feature) {
                if (feature.male) {
                    color = 0x1E90FF;
                } else {
                    color = 0xFF60FF;
                }
                if (feature.male) {
                    ageGenderEmotion = ' Male ' + feature.age + ' ' + feature.emotion;
                } else {
                    ageGenderEmotion = ' Female ' + feature.age + ' ' + feature.emotion;
                }
                if (!hasClear) {
                    hasClear = true;
                    ol.clear();
                }
                var x0 = Math.max(faceInfo[i].x0 - 10, 0);
                var x1 = Math.min(faceInfo[i].x1 + 10, 640 - 1);
                var y0 = Math.max(faceInfo[i].y0 - 10, 0);
                var y1 = Math.min(faceInfo[i].y1, 360 - 1);
                ol.rect(x0, y0, x1, y1, color, 10, 1, false);
                ol.text(x0 + 12, y0 - 14, ageGenderEmotion, color);
            }
        }
    }
    if (!hasClear) {
        ol.clear();
    }
});
netcam.on('eof', () => {
    quited = true;
});
netcam.start();
while (!quited) {
    iosched.poll(); // Event poll.
}
netcam.close();
This example shows how to get a face feature and store the face keys to a file.
var MediaDecoder = require('mediadecoder');
var iosched = require('iosched');
var facenn = require('facenn');
var fs = require('fs');
var netcam = new MediaDecoder().open('rtsp://admin:admin@10.4.0.12');
netcam.destVideoFormat({width: 640, height: 360, fps: 1, pixelFormat: MediaDecoder.PIX_FMT_RGB24, noDrop: false, disable: false});
netcam.destAudioFormat({disable: true});
netcam.previewFormat({enable: true, fb: 0, fps: 25});
var quited = false;
var extra = { male: true, age: true, emotion: true };
var ol = netcam.overlay();
var cnt = 5;
netcam.on('video', (video) => {
    var buf = new Buffer(video.arrayBuffer);
    var faceInfo = facenn.detect(buf, {width: 640, height: 360, pixelFormat: facenn.PIX_FMT_RGB24});
    var color = MediaDecoder.C_RED;
    var hasClear = false;
    var ageGenderEmotion = '';
    if (faceInfo.length) {
        ol.font(ol.F8X12);
        for (var i = 0; i < faceInfo.length; i++) {
            var feature = facenn.feature(buf, {width: 640, height: 360, pixelFormat: facenn.PIX_FMT_RGB24}, faceInfo[i], extra);
            if (feature) {
                if (feature.male) {
                    color = 0x1E90FF;
                } else {
                    color = 0xFF60FF;
                }
                if (feature.male) {
                    ageGenderEmotion = ' Male ' + feature.age + ' ' + feature.emotion;
                } else {
                    ageGenderEmotion = ' Female ' + feature.age + ' ' + feature.emotion;
                }
                if (!hasClear) {
                    hasClear = true;
                    ol.clear();
                }
                var x0 = Math.max(faceInfo[i].x0 - 10, 0);
                var x1 = Math.min(faceInfo[i].x1 + 10, 640 - 1);
                var y0 = Math.max(faceInfo[i].y0 - 10, 0);
                var y1 = Math.min(faceInfo[i].y1, 360 - 1);
                ol.rect(x0, y0, x1, y1, color, 10, 1, false);
                ol.text(x0 + 12, y0 - 14, ageGenderEmotion, color);
                console.log(cnt);
                if (--cnt == 0) {
                    // After the face has been seen in 5 frames, save its keys and quit.
                    fs.writeFile('./engineer.face', JSON.stringify(feature.keys));
                    quited = true;
                }
            }
        }
    }
    if (!hasClear) {
        ol.clear();
    }
});
netcam.on('eof', () => {
    quited = true;
});
netcam.start();
while (!quited) {
    iosched.poll(); // Event poll.
}
netcam.close();
This example shows how to get a face feature and compare it with an existing face keys file.
var MediaDecoder = require('mediadecoder');
var iosched = require('iosched');
var facenn = require('facenn');
var fs = require('fs');
var netcam = new MediaDecoder().open('rtsp://admin:admin@10.4.0.12');
netcam.destVideoFormat({width: 640, height: 360, fps: 1, pixelFormat: MediaDecoder.PIX_FMT_RGB24, noDrop: false, disable: false});
netcam.destAudioFormat({disable: true});
netcam.previewFormat({enable: true, fb: 0, fps: 25});
var quited = false;
var threshold = 0.6;
var extra = { male: true, age: true, emotion: true };
var ol = netcam.overlay();
var engineerFace = JSON.parse(fs.readString('./engineer.face'));
netcam.on('video', (video) => {
    var buf = new Buffer(video.arrayBuffer);
    var faceInfo = facenn.detect(buf, {width: 640, height: 360, pixelFormat: facenn.PIX_FMT_RGB24});
    var color = MediaDecoder.C_RED;
    var text = 'Unknown!';
    var hasClear = false;
    var ageGenderEmotion = '';
    if (faceInfo.length) {
        ol.font(ol.F8X12);
        for (var i = 0; i < faceInfo.length; i++) {
            var feature = facenn.feature(buf, {width: 640, height: 360, pixelFormat: facenn.PIX_FMT_RGB24}, faceInfo[i], extra);
            if (feature) {
                if (facenn.compare(engineerFace, feature.keys) >= threshold) {
                    // The detected face matches the stored keys: mark it as the engineer.
                    color = MediaDecoder.C_YELLOW;
                    text = 'Engineer';
                } else {
                    if (feature.male) {
                        color = 0x1E90FF;
                    } else {
                        color = 0xFF60FF;
                    }
                    text = 'Unknown!';
                }
                if (feature.male) {
                    ageGenderEmotion = ' Male ' + feature.age + ' ' + feature.emotion;
                } else {
                    ageGenderEmotion = ' Female ' + feature.age + ' ' + feature.emotion;
                }
                if (!hasClear) {
                    hasClear = true;
                    ol.clear();
                }
                var x0 = Math.max(faceInfo[i].x0 - 10, 0);
                var x1 = Math.min(faceInfo[i].x1 + 10, 640 - 1);
                var y0 = Math.max(faceInfo[i].y0 - 10, 0);
                var y1 = Math.min(faceInfo[i].y1, 360 - 1);
                ol.rect(x0, y0, x1, y1, color, 10, 1, false);
                ol.text(x0 + 12, y0 - 14, text + ageGenderEmotion, color);
            }
        }
    }
    if (!hasClear) {
        ol.clear();
    }
});
netcam.on('eof', () => {
    quited = true;
});
netcam.start();
while (!quited) {
    iosched.poll(); // Event poll.
}
netcam.close();